func runtime.add
81 uses
runtime (current package)
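add is the runtime's byte-offset helper for unsafe.Pointer: every call site below computes a base pointer plus some number of bytes (a struct field offset, an element index times the element size, or a fixed stride). Only the signature at stubs.go#L24 appears in this listing; as a minimal sketch of what the helper does (the package name is a placeholder and the one-line body is inferred from the signature and the call sites, not quoted from this listing):

    package ptrdemo // placeholder package name, for illustration only

    import "unsafe"

    // add returns p advanced by x bytes. Doing the uintptr round-trip in a
    // single expression is the sanctioned unsafe.Pointer arithmetic pattern,
    // and centralizing it in one helper keeps the call sites readable.
    func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
        return unsafe.Pointer(uintptr(p) + x)
    }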
alg.go#L233: h = typehash(a.Elem, add(p, i*a.Elem.Size_), h)
alg.go#L242: h = typehash(f.Typ, add(p, f.Offset), h)
alg.go#L298: if err := mapKeyError2(a.Elem, add(p, i*a.Elem.Size_)); err != nil {
alg.go#L309: if err := mapKeyError2(f.Typ, add(p, f.Offset)); err != nil {
arena.go#L545: userArenaHeapBitsSetType(typ, add(ptr, uintptr(i)*typ.Size_), s)
cgocall.go#L622: p = add(p, at.Elem.Size_)
cgocall.go#L648: p = *(*unsafe.Pointer)(add(p, goarch.PtrSize))
cgocall.go#L671: p = add(p, st.Elem.Size_)
cgocall.go#L694: cgoCheckArg(f.Typ, add(p, f.Offset), true, top, msg)
cgocheck.go#L125: p = add(p, typ.Size_)
cgocheck.go#L157: src = add(src, skipBytes)
cgocheck.go#L172: v := *(*unsafe.Pointer)(add(src, i))
chan.go#L106: c.buf = add(unsafe.Pointer(c), hchanSize)
chan.go#L139: return add(c.buf, uintptr(i)*uintptr(c.elemsize))
checkptr.go#L42: end := add(ptr, size-1)
hash64.go#L29: a |= uintptr(*(*byte)(add(p, s>>1))) << 8
hash64.go#L30: a |= uintptr(*(*byte)(add(p, s-1))) << 16
hash64.go#L36: b = r4(add(p, s-4))
hash64.go#L42: b = r8(add(p, s-8))
hash64.go#L49: seed = mix(r8(p)^hashkey[1], r8(add(p, 8))^seed)
hash64.go#L50: seed1 = mix(r8(add(p, 16))^hashkey[2], r8(add(p, 24))^seed1)
hash64.go#L51: seed2 = mix(r8(add(p, 32))^hashkey[3], r8(add(p, 40))^seed2)
hash64.go#L52: p = add(p, 48)
hash64.go#L57: seed = mix(r8(p)^hashkey[1], r8(add(p, 8))^seed)
hash64.go#L58: p = add(p, 16)
hash64.go#L60: a = r8(add(p, l-16))
hash64.go#L61: b = r8(add(p, l-8))
iface.go#L114: p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
iface.go#L175: p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
iface.go#L215: xmhdr := (*[1 << 16]abi.Method)(add(unsafe.Pointer(x), uintptr(x.Moff)))[:nt:nt]
iface.go#L369: x = add(x, 6)
iface.go#L382: x = add(x, 4)
iface.go#L687: m := *(**itab)(add(unsafe.Pointer(&t.entries), i*goarch.PtrSize))
malloc.go#L1249: x = add(x, span.elemsize-size)
malloc.go#L1471: x = add(x, mallocHeaderSize)
mbitmap.go#L679: dst0 := (*uintptr)(add(dst, (i+0)*goarch.PtrSize))
mbitmap.go#L680: dst1 := (*uintptr)(add(dst, (i+1)*goarch.PtrSize))
mbitmap.go#L685: dst := (*uintptr)(add(dst, i*goarch.PtrSize))
mgcmark.go#L278: ptrmask := (*uint8)(add(unsafe.Pointer(ptrmask0), uintptr(shard)*(rootBlockBytes/(8*goarch.PtrSize))))
mgcsweep.go#L957: *(*uint32)(add(x, i)) = 0xdeadbeef
mpagealloc_64bit.go#L122: offAddr{uintptr(add(base, baseOffset))},
mpagealloc_64bit.go#L123: offAddr{uintptr(add(base, limitOffset))},
mprof.go#L248: stk := (*[maxProfStackDepth]uintptr)(add(unsafe.Pointer(b), unsafe.Sizeof(*b)))
mprof.go#L261: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mprof.go#L270: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mspanset.go#L299: return (*atomic.Pointer[spanSetBlock])(add(s.p, goarch.PtrSize*idx))
netpoll.go#L700: pd := (*pollDesc)(add(mem, i*pdSize))
os_linux.go#L251: auxvp := (*[1 << 28]uintptr)(add(unsafe.Pointer(argv), uintptr(n)*goarch.PtrSize))
panic.go#L697: fd = add(fd, unsafe.Sizeof(b))
panic.go#L838: p.deferBitsPtr = (*byte)(add(sp, s.deferBitsOffset))
panic.go#L839: p.slotsPtr = add(sp, s.slotsOffset)
panic.go#L879: p.argp = add(p.startSP, sys.MinFrameSize)
panic.go#L905: return *(*func())(add(p.slotsPtr, i*goarch.PtrSize)), true
panic.go#L993: deferBitsPtr := (*uint8)(add(varp, -uintptr(deferBitsOffset)))
panic.go#L1002: p.slotsPtr = add(varp, -uintptr(slotsOffset))
proc.go#L698: return *(**g)(add(unsafe.Pointer(ptr), i*goarch.PtrSize))
proc.go#L7367: firstFunc := add(unsafe.Pointer(t), 8)
proc.go#L7369: p := add(firstFunc, uintptr(i)*goarch.PtrSize)
runtime1.go#L63: return *(**byte)(add(unsafe.Pointer(argv), uintptr(i)*goarch.PtrSize))
signal_linux_amd64.go#L55: *(*uintptr)(add(unsafe.Pointer(c.info), 2*goarch.PtrSize)) = uintptr(x)
slice.go#L59: memclrNoHeapPointers(add(to, copymem), tomem-copymem)
slice.go#L269: memclrNoHeapPointers(add(p, newlenmem), capmem-newlenmem)
slice.go#L344: memclrNoHeapPointers(add(new.array, oldcapmem), newlenmem-oldcapmem)
stack.go#L625: print(" ", add(scanp, (i+j)*goarch.PtrSize), ":", ptrnames[bv.ptrbit(i+j)], ":", hex(*(*uintptr)(add(scanp, (i+j)*goarch.PtrSize))), " # ", i, " ", *addb(bv.bytedata, i/8), "\n")
stack.go#L632: pp := (*uintptr)(add(scanp, (i+j)*goarch.PtrSize))
stkframe.go#L248: p = add(p, goarch.PtrSize)
string.go#L147: p = add(p, 7)
string.go#L320: memclrNoHeapPointers(add(p, uintptr(size)), cap-uintptr(size))
string.go#L335: memclrNoHeapPointers(add(p, uintptr(size)*4), mem-uintptr(size)*4)
stubs.go#L24: func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
symtab.go#L910: ffb := (*findfuncbucket)(add(unsafe.Pointer(datap.findfunctab), b*unsafe.Sizeof(findfuncbucket{})))
symtab.go#L1200: return *(*uint32)(add(unsafe.Pointer(&f.nfuncdata), unsafe.Sizeof(f.nfuncdata)+uintptr(table)*4))
traceback.go#L676: bits := *(*uint8)(add(liveInfo, uintptr(liveIdx)+uintptr(slotIdx/8)))
traceback.go#L681: x := readUnaligned64(add(argp, uintptr(off)))
type.go#L108: addr = add(addr, firstmoduledata.data-aixStaticDataBase)
vdso_linux.go#L115: pt := (*elfPhdr)(add(pt, uintptr(i)*unsafe.Sizeof(elfPhdr{})))
vdso_linux.go#L196: aux := (*elfVerdaux)(add(unsafe.Pointer(def), uintptr(def.vd_aux)))
vdso_linux.go#L205: def = (*elfVerdef)(add(unsafe.Pointer(def), uintptr(def.vd_next)))
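A common pattern among the sites above is indexing element i of a raw buffer as base + i*elemsize (as in chan.go#L139 or runtime1.go#L63). The following self-contained demo lives outside the runtime; the buffer and variable names are hypothetical, and only the add idiom mirrors the listing:

    package main

    import (
        "fmt"
        "unsafe"
    )

    // add mirrors the runtime helper: advance p by x bytes.
    func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
        return unsafe.Pointer(uintptr(p) + x)
    }

    func main() {
        // Element i of a contiguous buffer lives at base + i*elemsize,
        // the same arithmetic chan.go#L139 uses on a channel's ring buffer.
        buf := [4]uint64{10, 20, 30, 40}
        base := unsafe.Pointer(&buf[0])
        elemSize := unsafe.Sizeof(buf[0])

        for i := uintptr(0); i < uintptr(len(buf)); i++ {
            v := *(*uint64)(add(base, i*elemSize))
            fmt.Println(i, v)
        }
    }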